
function [model] = oaasmo(data,labels,ker,arg,C,epsilon,tolerance,info)
% OAASMO One-Against-All multi-class decomposition solved by SMO.
% [model] = oaasmo(X,I,ker,arg,C,epsilon,tolerance,info)
%
% OAASMO solves the multi-class Support Vector Machine problem
%  using the Sequential Minimal Optimizer (SMO).
%  The input K-class problem is decomposed into
%  K binary problems: the i-th class against the remaining (K-1)
%  classes. Each binary problem is solved by SMO, which
%  yields K decision rules (see the Example below).
%  
% Mandatory inputs:
%  data [NxL] contains L N-dimensional training patterns.
%  labels [1xL] labels of the patterns; integers from 1 to K.
%  ker [string] kernel identifier (see "help smoc").
%  arg [...] arguments of given kernel (see "help smoc").
%  C [1x1] trade-off between maximal margin and training error.
%
% Optional inputs:
%  epsilon [real] tolerance of the KKT-conditions fulfilment
%    (default is 0.001).
%  tolerance [real] minimal change of the optimized Lagrangians
%    (default is 0.001).
%  info [int] if set to 1, prints which decision rule is
%    currently being built (default is 0).
%
% Output: 
%  model [struct] contains found multi-class SVM classifier. 
%  
% For instance, model.rule{i}.Alpha and model.rule{i}.bias are the
% multipliers and the bias of the i-th binary problem: the i-th
% class against the others.
%
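% Example:
%  A minimal illustrative sketch. The kernel identifier 'rbf' and the
%  oaaclass calling convention below are assumptions; see 'help smoc'
%  and 'help oaaclass' for the exact conventions.
%    data = [randn(2,50), randn(2,50)+3, randn(2,50)-3];  % 3 Gaussian clusters
%    labels = [ones(1,50), 2*ones(1,50), 3*ones(1,50)];   % classes 1, 2, 3
%    model = oaasmo( data, labels, 'rbf', 1, 10 );        % RBF kernel, C = 10
%    ypred = oaaclass( data, model );                     % apply the K rules
%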
% See also OAACLASS, SMO, SVM.
%

% Statistical Pattern Recognition Toolbox, Vojtech Franc, Vaclav Hlavac
% (c) Czech Technical University Prague, http://cmp.felk.cvut.cz
% Modifications
%  2-Sep-2002, VF
%  21-Oct-2001, V.Franc
%  18-September-2001, V. Franc, created


% -- Processing of the input arguments ----------------

if nargin < 5,
   error('Not enough input arguments.');
end

if nargin < 6,
   epsilon = 0.001;
end

if nargin < 7,
   tolerance = 0.001;
end

if nargin < 8, 
  info = 0;
end


%---------------------------------

% get dimensions
[dim,num_data]=size(data);
num_classes=max(labels);

%---------------------------------

model.name = 'One-Against-All SVM classifier';
model.num_classes = num_classes;
model.num_rules = num_classes;
model.rule = cell(1,model.num_rules);
model.kercnt = 0;
model.btime = 0;

tmp_alpha=zeros(1,num_data);

for i=1:num_classes,

   if info==1,
     fprintf(1,'Building rule %d of %d\n', i,num_classes);
   end
  
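   % convert the K-class labels into a +1/-1 dichotomy: class i against the rest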
   dich_labels=multi2dicho( labels, i );
   
   t=cputime;
   
   [Alpha,bias,nsv,kercnt]=...
       smo(data,dich_labels,ker,arg,C,epsilon,tolerance);
   
   model.btime= model.btime + cputime-t;
   model.kercnt = model.kercnt + kercnt;
   model.rule{i}.Alpha = Alpha;
   model.rule{i}.bias = bias;
   
   tmp_alpha = tmp_alpha+Alpha;
end

model.SVM.C = C;
model.SVM.kernel = ker;
model.SVM.arg = arg;

model.trn_data = data;
model.trn_labels = labels;

model.nsv = length(find(tmp_alpha));  

return;